Importing the libraries¶

In [3]:
import segyio ## library for reading and manipulating segy files 
import matplotlib.pyplot as plt ## library for plotting
import numpy as np ## library for arrays
from scipy import ndimage as ndi ## library for scientific calc and image processing
from shutil import copyfile  ## for files management
from skimage import exposure ## image processing
import pandas as pd ## arrays, data handling
import openpyxl ## excel file reader/writer

Importing the segy file and converting it into DataFrame (Pandas)¶

In [4]:
filename='C:/Users/abdul/Downloads/seismic.segy'  # write path where seismic.segy file is downloaded
f=segyio.open(filename, 'r+', strict=False, ignore_geometry=True) ## opening segy file for reading
In [5]:
print(type(filename))
print(type(f))
<class 'str'>
<class 'segyio.segy.SegyFile'>
In [20]:
frames = [pd.DataFrame(f.trace[i] for i in range(0,120120,1))] ## getting all 1001 shots data, 1001*120 =120120 traces
print(type(frames))
result = pd.concat(frames)
print(type(result))
# result.to_excel('C:/Users/abdul/Downloads/ejaz1.xlsx', index=False)
<class 'list'>
<class 'pandas.core.frame.DataFrame'>
In [21]:
sg2=result.T ## transpose of data (1500 rows by 120120 columns (traces))
In [238]:
sg2 = pd.DataFrame(sg2.astype('float32')) # converting data to float64 type
In [239]:
print((sg2.shape))
(1500, 120120)

handling missing shots¶

In [240]:
mis = [80,81,88,89,90,449,450,451,759,760,761] ## 1012 shots were recorded, of which 11 got missed
slistc =list(range(1,1013)) 
slist=[0]*len(slistc)
z=1
for i in slistc:
    if i not in mis:
        slist[i-1]=z ## list containing the position of missed shots
        z+=1

basic geometry and parameters¶

In [241]:
so, dx, nt, fold = 262, 25, 120, 60 ## source offset to first trace, trace interval, no. of trace per shot, trace per CMP gather
fs = 250  # sampling frequency
dt = 1 / fs  # sampling interval
td = 1500 * dt # total duration
df = 1 / td ## frequency interval
dw = 2 * np.pi * df ## frequency interval in rad/s
t = np.arange(0, td, dt) ## time vector

Function for extracting the 60 fold CMP gather from the 60 shot data¶

In [242]:
def extsg(sg2, k): ## sg2 is whole data set. k = shot number from 1001 shots 
    
    idx = slist.index(k)
    offsets = np.array([])
    i=0
    j=np.array([])
    m=0
    z=0
    while i < fold:
        if slist[i+idx]==0: ## if shot is missed then ignore it and continue checking for correct data 
            i+=1
            z += -2
            # print('mis')
            continue
        offsets=np.append(offsets,np.array([so + 2*i*dx]))  ## offsets to each receiver considering missed shots
        j = np.append(j, np.array([k*nt-1 + m*118 +z])) ## index for picked trace from sg2 to form a cmp gather
        m += 1
        i += 1
        
    sg = pd.DataFrame(0.0, index=range(1500), columns=range(len(j))) 
    sg.iloc[:,0:len(j)] = sg2.iloc[:,j] ## extracted cmp gather from whole data 
    return sg, offsets
    

Functions for NMO correction¶

In [269]:
def nmo_correction(sgn, dt, offsets, vnmo): ## here velocities is single value for doing semblance analysis.
    nmo = np.zeros_like(sgn)
    nsamples = 1500 
    times = np.arange(0, nsamples*dt, dt)
    for i, t0 in enumerate(times):
        for j, s0 in enumerate(offsets):
            if t0*vnmo<=8:
                break
            tt = reflection_time(t0, s0, vnmo) ## reflection time calculated using t0, s0, and nmo velocity
            amplitude = sample_trace(sgn[:, j], tt, dt) 
            if amplitude is not None: 
                nmo[i, j] = amplitude 
    return nmo
In [247]:
# def reflection_time(t0, x, vnmo): 
#     t = np.sqrt(t0**2 + x**2/vnmo**2) 
#     return t

Modified reflection time to counter differential elevation in source and receivers¶

In [244]:
def reflection_time(t0,s0,vnmo):
    xd = 4*s0/(vnmo*t0-4)
    s1= np.sqrt((vnmo*t0/2)**2+((s0+xd)/2)**2)
    s2= np.sqrt((vnmo*t0/2-4)**2 + ((s0-xd)/2)**2)
    t= (s1+s2)/vnmo
    return t
In [236]:
reflection_time(.4,262,1500)
# sample_trace(sgn[:, 1], ttt, dt)
Out[236]:
0.4340302088820802
In [245]:
from scipy.interpolate import CubicSpline 

def sample_trace(trace, time, dt): 
    before = int(np.floor(time/dt)) 
    N = trace.size 
    samples = np.arange(before-1, before + 3) 
    if any(samples < 0) or any(samples >= N): 
        amplitude = None 
    else: 
        times = dt*samples 
        amps = trace[samples] 
        interpolator = CubicSpline(times, amps) 
        amplitude = interpolator(time) # amplitude is ndarray of size 1 and shape == (). it is a scalar. cubicpline is used to interpolate value
        ## of trace amplitude corresponding to reflection time.
        amplitude = amplitude[()] # converting ndarray to numpy.float64
    return amplitude

Balancing the traces to rms level¶

In [246]:
def balance_trace(trace):
    """
    Balance a single seismic trace to a common rms level.
    """
    # Calculate the rms value of the trace
    rms_value = np.sqrt(np.mean(trace ** 2))

    # Scale the trace to the common rms level (e.g., 1.0)
    balanced_trace = trace / rms_value

    return balanced_trace

def balance_seismic_data(seismic_data):
    """
    Balance all traces in a seismic data set to a common rms level.
    """
    balanced_seismic_data = np.zeros_like(seismic_data)

    # Balance each trace individually
    for i in range(seismic_data.shape[1]):
        balanced_seismic_data[:, i] = balance_trace(seismic_data[:, i])

    return balanced_seismic_data

Function for semblance coefficient¶

In [247]:
def sem(nmc):
    # k = 5
    seb = np.zeros((1500))
    for k in range(5,1495,1):
        sumnum=0
        sumden=0
        for j in range(k-5,k+6,1):
            sumnum += (np.sum(nmc[j,:]))**2
            sumden += np.sum(nmc[j,:]**2)
        if sumden != 0:
            seb[k] = sumnum/sumden/nmc.shape[1]
    return seb
In [268]:
def f_nmo_correction(sgn, dt, offsets, velocities):  ## here velocities is a vector containing velocity function with time.
    # nmo = pd.DataFrame(0, index=range(sz[0]), columns=range(sz[1]))
    nmo = np.zeros_like(sgn)
    # print(nmo.iloc[1499,59])
    nsamples = 1500 #cmp.shape[0] 
    times = np.arange(0, nsamples*dt, dt)
    for i, t0 in enumerate(times):
        for j, s0 in enumerate(offsets):
            if t0*velocities[i]<=8:
                # print('t0 = ', t0, ',vnmo= ', velocities, ',offset= ', x)
                break
            tt = reflection_time(t0, s0, velocities[i])
            # print('t0 = ', t0, ',vnmo= ', velocities, ',offset= ', x, ', ref time = ', tt, ',xd = ', xd)
            amplitude = sample_trace(sgn[:, j], tt, dt) 
            if amplitude is not None: 
                nmo[i, j] = amplitude 
    return nmo

CMP stack analysis (20 CMPs) Old analysis¶

In [ ]:
file_semb = 'C:/Users/abdul/Downloads/Ejaz/sembs.xlsx' ## path where semblance analysis matrices are saved (sembs.xlsx). Check 
## VikingAT_CMPstack jupyter notebook
file_sgs = 'C:/Users/abdul/Downloads/Ejaz/sgs.xlsx' ## path where cmp gathers are saved (sgs.xlsx)
cmp_stack=[]
vel = np.arange(350,4501,25)

for i in range(1,942,49):
    print(i)
    nsheet_name=f'sheet_{i}'

    sg, offsets = extsg(sg2,i)
    # sg = pd.read_excel(file_sgs, sheet_name=nsheet_name)

    sg=sg.values
    # print('sg done')
    # print(sg[2,3])
    sz = sg.shape
    sgn = balance_seismic_data(sg) ## balancing the cmp gather


### Plotting normalized cmp gather
    
    sn2 = np.zeros_like(sgn)
    for k in range(sz[1]):
        sn2[:,k] = sgn[:,k] + k*5.5


    fig, ax1 = plt.subplots()
    ax1.plot(sn2[:, :sz[1]], t, 'k')
    ax1.invert_yaxis()  # Reverse the y-axis

    # Extract x-tick positions
    xtick_positions = np.mean(sn2[:, list(range(0,sz[1],8))], axis=0) 
    # Set x-tick marks and labels for the bottom axes
    ax1.set_xticks(xtick_positions)
    ax1.set_xticklabels(list(range(1,sz[1]+1,8)))

    ax2 = ax1.twiny()

    # Set x-tick marks and labels for the top axes
    xtick_positions_top = np.mean(sn2[:, list(range(0,sz[1],8))], axis=0) 
    ax2.set_xticks(xtick_positions_top)
    ax2.set_xticklabels(offsets.tolist()[0:sz[1]:8])
    
    ax2.set_xlim(ax1.get_xlim())
    
    ax1.set_xlabel('Trace number')
    ax2.set_xlabel('Offset from source (m)')
    
    # Set label for y-axis
    ax1.set_ylabel('Time (s)')
    plt.title('CMP Gather')

    plt.show()
    

### reading semblance values
    semb = pd.read_excel(file_semb, sheet_name=nsheet_name, header=None)
    semb=semb.values

### Plotting semblance contour

    fig, ax3 = plt.subplots()
    contour = ax3.contourf(vel, t, semb / np.max(semb), cmap='magma')
    ax3.invert_yaxis()  # Reverse the y-axis
    ax3.set_ylabel('Time (s)')
    ax3.set_xlabel('Velocity (m/s)')
    
    plt.title('Semblance Contour Plot')
    
    # Add colorbar using the result of contourf as the mappable
    cbar = plt.colorbar(contour, ax=ax3, label='Semblance')
    
    # Show the plot
    plt.show()

#### Extracing and plotting velocity function with time (from semblance analysis/contour plot)
    mxi = np.argmax(semb, axis=1)
    # Find the maximum values in each row
    mxv = semb[np.arange(len(semb)), mxi]
    velm = vel[mxi] # velocity function

    plt.plot(velm,t)
    plt.gca().invert_yaxis()
    # Add labels and title
    plt.xlabel('Velocity')
    plt.ylabel('Time')
    plt.title('Velocity Function')
    plt.show()


### Final NMO correction using velocity function
    final_nmc = f_nmo_correction(sgn, dt, offsets, velm)

    
### Plotting final NMO corrected CMP gather
    sn3 = np.zeros_like(final_nmc)
    for k in range(sz[1]):
        sn3[:,k] = final_nmc[:,k]+k*5.5

    fig, ax4 = plt.subplots()
    ax4.plot(sn3[:, :sz[1]], t, 'k')
    ax4.invert_yaxis()  # Reverse the y-axis

    # Extract x-tick positions
    xtick_positions = np.mean(sn3[:, list(range(0,sz[1],8))], axis=0) #s2[:, [0, 9, 19, 29, 39, 49, 59]].mean().values
    # Set x-tick marks and labels for the bottom axes
    ax4.set_xticks(xtick_positions)
    ax4.set_xticklabels(list(range(1,sz[1]+1,8)))

    ax5 = ax4.twiny()

    # Set x-tick marks and labels for the top axes
    xtick_positions_top = np.mean(sn3[:, list(range(0,sz[1],8))], axis=0) #s2[:, [0, 9, 19, 29, 39, 49, 59]].mean().values
    ax5.set_xticks(xtick_positions_top)
    ax5.set_xticklabels(offsets.tolist()[0:sz[1]:8])
    
    ax5.set_xlim(ax1.get_xlim())
    
    ax4.set_xlabel('Trace number')
    ax5.set_xlabel('Offset from source (m)')
    
    # Set label for y-axis
    ax4.set_ylabel('Time (s)')

    plt.title('Final NMO Corrected CMP Gather')
    plt.show()


### CMP Stacking 
    stack = np.sum(final_nmc, axis=1, keepdims=True)
    stack_bal = balance_seismic_data(stack)
    cmp_stack.append(stack_bal)

CMP stack analysis (20 CMPs) Updated¶

In [281]:
file_semb = 'C:/Users/abdul/Downloads/Ejaz/sembs.xlsx' ## path where semblance analysis matrices are saved (sembs.xlsx). Check 
## VikingAT_CMPstack jupyter notebook
file_sgs = 'C:/Users/abdul/Downloads/Ejaz/sgs.xlsx' ## path where cmp gathers are saved (sgs.xlsx)
cmp_stack=[]
vel = np.arange(350,4501,25)

for i in range(1,942,49):
    print(i)
    nsheet_name=f'sheet_{i}'

    sg, offsets = extsg(sg2,i)
    offsets=offsets.astype(int)
    # sg = pd.read_excel(file_sgs, sheet_name=nsheet_name)

    sg=sg.values
    # print('sg done')
    # print(sg[2,3])
    sz = sg.shape
    sgn = balance_seismic_data(sg) ## balancing the cmp gather


### Plotting normalized cmp gather
    
    sn2 = np.zeros_like(sgn)
    for k in range(sz[1]):
        sn2[:,k] = sgn[:,k] + k*5.5


    fig1, ax1 = plt.subplots(figsize=(1.5,1.5),dpi=400)
    ax1.plot(sn2[:, :sz[1]], t, 'k',linewidth=0.15)
    ax1.invert_yaxis()  # Reverse the y-axis
    ax1.tick_params(axis='both', which='major', labelsize=3, width=0.2, length=1.4,pad=.6)

    for axis in ['top','bottom','left','right']:
        ax1.spines[axis].set_linewidth(0.5)


    # Extract x-tick positions
    xtick_positions = np.mean(sn2[:, list(range(0,sz[1],8))], axis=0) 
    # Set x-tick marks and labels for the bottom axes
    ax1.set_xticks(xtick_positions)
    ax1.set_xticklabels(list(range(1,sz[1]+1,8)))
    

    ax2 = ax1.twiny()

    # Set x-tick marks and labels for the top axes
    xtick_positions_top = np.mean(sn2[:, list(range(0,sz[1],8))], axis=0) 
    ax2.set_xticks(xtick_positions_top)
    ax2.set_xticklabels(offsets.tolist()[0:sz[1]:8])
    
    ax2.set_xlim(ax1.get_xlim())
    ax2.tick_params(axis='both', which='major', labelsize=2.5, width=0.2, length=1.4,pad=.6)

    for axis in ['top','bottom','left','right']:
        ax2.spines[axis].set_linewidth(0.5)
    
    ax1.set_xlabel('Trace number', fontsize=3,labelpad=.9)
    ax2.set_xlabel('Offset from source (m)', fontsize=3,labelpad=.9)
    
    # Set label for y-axis
    ax1.set_ylabel('Time (s)', fontsize=3,labelpad=.9)
    plt.title('CMP Gather', fontsize=4,pad=1)

    plt.show()
    

### reading semblance values
    semb = pd.read_excel(file_semb, sheet_name=nsheet_name, header=None)
    semb=semb.values

### Plotting semblance contour

    fig2, ax3 = plt.subplots(figsize=(1.8, 1.5),dpi=400)  # Set DPI here
    
    contour = ax3.contourf(vel, t, semb/np.max(semb), cmap='magma')
    ax3.set_ylim(0, 6)
    ax3.invert_yaxis()  # Reverse the y-axis
    ax3.tick_params(axis='both', which='major', labelsize=3, width=0.2, length=1.4,pad=.6)
    

    for axis in ['top','bottom','left','right']:
        ax3.spines[axis].set_linewidth(0.5)
    
    ax3.set_ylabel('Time (s)', fontsize=3,labelpad=.9)
    ax3.set_xlabel('Velocity (m/s)', fontsize=3,labelpad=.9)
    plt.title('Semblance Contour Plot and Vel Func', fontsize=4,pad=1.2)
    
    # Add colorbar using the result of contourf as the mappable
    cbar = plt.colorbar(contour, ax=ax3, label='Semblance')

    cbar.outline.set_linewidth(.3)
    # Adjust the font size of the colorbar label
    cbar.set_label('Semblance', fontsize=3,labelpad=.8)
    
    # Adjust the font size of the colorbar tick labels
    cbar.ax.tick_params(labelsize=3, width=0.2, length=1.4,pad=.6)  # Adjust the labelsize as needed
    
    # Plot velocity function superimposed onto the contour plot
    mxi = np.argmax(semb, axis=1)
    mxv = semb[np.arange(len(semb)), mxi]
    velm = vel[mxi]
    ax3.plot(velm, t, 'w', linewidth=0.2)   # Plot velocity function in white color
    # ax3.tick_params(axis='both', which='major', labelsize=3)
    
    # Show the plot
    plt.show()



### Final NMO correction using velocity function
    final_nmc = f_nmo_correction(sgn, dt, offsets, velm)

    
### Plotting final NMO corrected CMP gather
    sn3 = np.zeros_like(final_nmc)
    for k in range(sz[1]):
        sn3[:,k] = final_nmc[:,k]+k*5.5

    fig3, ax4 = plt.subplots(figsize=(1.5, 1.5),dpi=400)
    ax4.plot(sn3[:, :sz[1]], t, 'k',linewidth=0.15)
    ax4.invert_yaxis()  # Reverse the y-axis
    ax4.tick_params(axis='both', which='major', labelsize=3, width=0.2, length=1.4,pad=.6)

    for axis in ['top','bottom','left','right']:
        ax4.spines[axis].set_linewidth(0.5)

    # Extract x-tick positions
    xtick_positions = np.mean(sn3[:, list(range(0,sz[1],8))], axis=0) #s2[:, [0, 9, 19, 29, 39, 49, 59]].mean().values
    # Set x-tick marks and labels for the bottom axes
    ax4.set_xticks(xtick_positions)
    ax4.set_xticklabels(list(range(1,sz[1]+1,8)))

    ax5 = ax4.twiny()

    # Set x-tick marks and labels for the top axes
    xtick_positions_top = np.mean(sn3[:, list(range(0,sz[1],8))], axis=0) #s2[:, [0, 9, 19, 29, 39, 49, 59]].mean().values
    ax5.set_xticks(xtick_positions_top)
    ax5.set_xticklabels(offsets.tolist()[0:sz[1]:8])

    for axis in ['top','bottom','left','right']:
        ax5.spines[axis].set_linewidth(0.5)
    
    ax5.set_xlim(ax1.get_xlim())
    ax5.tick_params(axis='both', which='major', labelsize=2.5, width=0.2, length=1.4,pad=.6)
    
    ax4.set_xlabel('Trace number', fontsize=3,labelpad=.9)
    ax5.set_xlabel('Offset from source (m)', fontsize=3,labelpad=.9)
    
    # Set label for y-axis
    ax4.set_ylabel('Time (s)', fontsize=3,labelpad=.9)

    plt.title('Final NMO Corrected CMP Gather', fontsize=4,pad=1)
    plt.show()


### CMP Stacking 
    stack = np.sum(final_nmc, axis=1, keepdims=True)
    stack_bal = balance_seismic_data(stack)
    cmp_stack.append(stack_bal)
1
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
50
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
99
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
148
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
197
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
246
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
295
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
344
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
393
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
442
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
491
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
540
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
589
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
638
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
687
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
736
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
785
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
834
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
883
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
932
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image

Updated velocity functions¶

In [282]:
def velf_gen(velm): # to define a velocity function
    velf=np.zeros_like(t)


    velf[0:int(0.5*250)] = 5 #velm[0:int(0.5*250)]
    velf[int(0.5*250):int(4*250)+1] = 1500 + (1600-1500)/(t[int(4*250)]-t[int(0.5*250)])*(t[int(0.5*250):int(4*250)+1]-t[int(0.5*250)])

    for i in range(int(4*250)+1,int(5*250)+1):
            velf[i] = velm[i] if velm[i] >= velf[i-1] and velm[i] < 2000 else velf[i-1]

    for i in range(int(5*250)+1,int(6*250)):
            velf[i] = velm[i] if velm[i] >= velf[i-1] and velm[i] < 2500 else velf[i-1]

    return velf    
In [271]:
file_semb = 'C:/Users/abdul/Downloads/Ejaz/sembs.xlsx' ## path where semblance analysis matrices are saved (sembs.xlsx). Check 
## VikingAT_CMPstack jupyter notebook
file_sgs = 'C:/Users/abdul/Downloads/Ejaz/sgs.xlsx' ## path where cmp gathers are saved (sgs.xlsx)
cmp_stack1=[]
vel = np.arange(350,4501,25)

for i in range(1,942,49):
    print(i)
    nsheet_name=f'sheet_{i}'

    sg, offsets = extsg(sg2,i)
    offsets=offsets.astype(int)
    # sg = pd.read_excel(file_sgs, sheet_name=nsheet_name)

    sg=sg.values
    # print('sg done')
    # print(sg[2,3])
    sz = sg.shape
    sgn = balance_seismic_data(sg) ## balancing the cmp gather


### Plotting normalized cmp gather
    
    sn2 = np.zeros_like(sgn)
    for k in range(sz[1]):
        sn2[:,k] = sgn[:,k] + k*5.5


    fig1, ax1 = plt.subplots(figsize=(1.5,1.5),dpi=400)
    ax1.plot(sn2[:, :sz[1]], t, 'k',linewidth=0.2)
    ax1.invert_yaxis()  # Reverse the y-axis
    ax1.tick_params(axis='both', which='major', labelsize=3, width=0.2, length=1.4,pad=.6)

    for axis in ['top','bottom','left','right']:
        ax1.spines[axis].set_linewidth(0.5)


    # Extract x-tick positions
    xtick_positions = np.mean(sn2[:, list(range(0,sz[1],8))], axis=0) 
    # Set x-tick marks and labels for the bottom axes
    ax1.set_xticks(xtick_positions)
    ax1.set_xticklabels(list(range(1,sz[1]+1,8)))
    

    ax2 = ax1.twiny()

    # Set x-tick marks and labels for the top axes
    xtick_positions_top = np.mean(sn2[:, list(range(0,sz[1],8))], axis=0) 
    ax2.set_xticks(xtick_positions_top)
    ax2.set_xticklabels(offsets.tolist()[0:sz[1]:8])
    
    ax2.set_xlim(ax1.get_xlim())
    ax2.tick_params(axis='both', which='major', labelsize=2.5, width=0.2, length=1.4,pad=.6)

    for axis in ['top','bottom','left','right']:
        ax2.spines[axis].set_linewidth(0.5)
    
    ax1.set_xlabel('Trace number', fontsize=3,labelpad=.9)
    ax2.set_xlabel('Offset from source (m)', fontsize=3,labelpad=.9)
    
    # Set label for y-axis
    ax1.set_ylabel('Time (s)', fontsize=3,labelpad=.9)
    plt.title('CMP Gather', fontsize=4,pad=1)

    plt.show()
    

### reading semblance values
    semb = pd.read_excel(file_semb, sheet_name=nsheet_name, header=None)
    semb=semb.values

### Plotting semblance contour

    fig2, ax3 = plt.subplots(figsize=(1.8, 1.5),dpi=400)  # Set DPI here
    
    contour = ax3.contourf(vel, t, semb/np.max(semb), cmap='magma')
    ax3.set_ylim(0, 6)
    ax3.set_xlim(350, 4500)
    ax3.invert_yaxis()  # Reverse the y-axis
    ax3.tick_params(axis='both', which='major', labelsize=3, width=0.2, length=1.4,pad=.6)
    

    for axis in ['top','bottom','left','right']:
        ax3.spines[axis].set_linewidth(0.5)
    
    ax3.set_ylabel('Time (s)', fontsize=3,labelpad=.9)
    ax3.set_xlabel('Velocity (m/s)', fontsize=3,labelpad=.9)
    plt.title('Semblance Contour Plot and Vel Func', fontsize=4,pad=1.2)
    
    # Add colorbar using the result of contourf as the mappable
    cbar = plt.colorbar(contour, ax=ax3, label='Semblance')

    cbar.outline.set_linewidth(.3)
    # Adjust the font size of the colorbar label
    cbar.set_label('Semblance', fontsize=3,labelpad=.8)
    
    # Adjust the font size of the colorbar tick labels
    cbar.ax.tick_params(labelsize=3, width=0.2, length=1.4,pad=.6)  # Adjust the labelsize as needed
    
    # Plot velocity function superimposed onto the contour plot
    

    
    mxi = np.argmax(semb, axis=1)
    velm = vel[mxi]

    # id1 = np.argmax(velm[int(0.3*250):int(1*250)]) + 75
    # v1, t1 = velm[id1], t[id1]
    
    # id2 = np.argmax(velm[int(3*250):int(5.5*250)]) + 750
    # v2, t2 = velm[id2], t[id2]

    # v2=np.mean(velm[int(3*250):int(4.7*250)])
    # t2=4
    
    # start_index = int(3 * 250)
    # end_index = int(4.7 * 250)
    
    # # Create a boolean mask to filter out values below 1600 within the specified range
    # mask = (velm[start_index:end_index] >= 1550)
    
    # # Calculate the mean using only the filtered values within the specified range
    # v2 = np.mean(velm[start_index:end_index][mask])
    
    # t2=4

    # velf = v1 + (v2-v1)/(t2-t1)*(t-t1)

    velf = velf_gen(velm)
    # velf=velm
    
    ax3.plot(velf, t, 'w', linewidth=0.2)   # Plot velocity function in white color
    # ax3.scatter([v1, v2], [t1, t2], c=['r', 'g'],s=5)
    # ax3.tick_params(axis='both', which='major', labelsize=3)
    
    # Show the plot
    plt.show()



### Final NMO correction using velocity function
    final_nmc = f_nmo_correction(sgn, dt, offsets, velf)

    
### Plotting final NMO corrected CMP gather
    sn3 = np.zeros_like(final_nmc)
    for k in range(sz[1]):
        sn3[:,k] = final_nmc[:,k]+k*5.5

    fig3, ax4 = plt.subplots(figsize=(1.5, 1.5),dpi=400)
    ax4.plot(sn3[:, :sz[1]], t, 'k',linewidth=0.25)
    ax4.invert_yaxis()  # Reverse the y-axis
    ax4.tick_params(axis='both', which='major', labelsize=3, width=0.2, length=1.4,pad=.6)

    for axis in ['top','bottom','left','right']:
        ax4.spines[axis].set_linewidth(0.5)

    # Extract x-tick positions
    xtick_positions = np.mean(sn3[:, list(range(0,sz[1],8))], axis=0) #s2[:, [0, 9, 19, 29, 39, 49, 59]].mean().values
    # Set x-tick marks and labels for the bottom axes
    ax4.set_xticks(xtick_positions)
    ax4.set_xticklabels(list(range(1,sz[1]+1,8)))

    ax5 = ax4.twiny()

    # Set x-tick marks and labels for the top axes
    xtick_positions_top = np.mean(sn3[:, list(range(0,sz[1],8))], axis=0) #s2[:, [0, 9, 19, 29, 39, 49, 59]].mean().values
    ax5.set_xticks(xtick_positions_top)
    ax5.set_xticklabels(offsets.tolist()[0:sz[1]:8])

    for axis in ['top','bottom','left','right']:
        ax5.spines[axis].set_linewidth(0.5)
    
    ax5.set_xlim(ax1.get_xlim())
    ax5.tick_params(axis='both', which='major', labelsize=2.5, width=0.2, length=1.4,pad=.6)
    
    ax4.set_xlabel('Trace number', fontsize=3,labelpad=.9)
    ax5.set_xlabel('Offset from source (m)', fontsize=3,labelpad=.9)
    
    # Set label for y-axis
    ax4.set_ylabel('Time (s)', fontsize=3,labelpad=.9)

    plt.title('Final NMO Corrected CMP Gather', fontsize=4,pad=1)
    plt.show()


### CMP Stacking 
    stack = np.sum(final_nmc, axis=1, keepdims=True)
    stack_bal = balance_seismic_data(stack)
    cmp_stack1.append(stack_bal)
1
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
50
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
99
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
148
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
197
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
246
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
295
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
344
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
393
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
442
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
491
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
540
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
589
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
638
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
687
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
736
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
785
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
834
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
883
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
932
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
In [272]:
off = np.array([0])
k=1
z=0
for i in range(50,942,49):
    idx = slist.index(i)
    # print('idx= ',idx)
    val = slistc[idx] - k
    # print('\n','val= ',val)
    off=np.append(off,off[z]+val*25)
    # print('\n',off)
    k=slistc[idx]
    z+=1
In [273]:
cmp_st = np.column_stack(cmp_stack1)

Gain function¶

In [274]:
def apply_exponential_gain(trace, exponent):
    """
    Apply exponential gain function to a single seismic trace.
    
    Parameters:
        trace (ndarray): Single seismic trace.
        exponent (float): Exponent parameter for the exponential gain function.
    
    Returns:
        ndarray: Seismic trace with exponential gain applied.
    """
    # Compute exponential gain
    gain = np.exp(exponent * t)
    
    # Apply gain to the trace
    trace_with_gain = trace * gain

    # trace_g = np.concatenate((trace_with_gain[0:int(3*250)+1],trace[int(3*250)+1:1500]),axis=0)
    
    return trace_with_gain

def apply_exponential_gain_to_stack(cmp_st, exponent):
    """
    Apply exponential gain function to each trace in a CMP stack.
    
    Parameters:
        cmp_stk (ndarray): CMP stacked traces with shape (number of samples, number of traces).
        exponent (float): Exponent parameter for the exponential gain function.
    
    Returns:
        ndarray: CMP stacked traces with exponential gain applied.
    """
    # Initialize array to store traces with gain applied
    cmp_stk_with_gain = np.zeros_like(cmp_st)
    
    # Apply gain function to each trace
    for i in range(cmp_st.shape[1]):
        cmp_stk_with_gain[:, i] = apply_exponential_gain(cmp_st[:, i], exponent)
    
    return cmp_stk_with_gain
In [278]:
# Parameters for exponential gain function
exponent = .6  # Adjust as needed

# Apply exponential gain function to CMP stack
cmp_stk_gain = apply_exponential_gain_to_stack(cmp_st, exponent)
In [279]:
cmp_stk = np.zeros_like(cmp_st)
for i in range((cmp_st.shape[1])):
    cmp_stk[:,i] = cmp_stk_gain[:,i]+i*15.5
In [280]:
fig, ax1 = plt.subplots(figsize=(1.5,1.5),dpi=400)
ax1.plot(cmp_stk[:, :cmp_stk.shape[1]], t, 'k',linewidth=0.15)
# ax1.set_ylim(0, 3)  # Set the lower and upper limits
ax1.invert_yaxis()  # Reverse the y-axis
ax1.invert_xaxis()



ax1.tick_params(axis='both', which='major', labelsize=3, width=0.2, length=1.4,pad=.6)

for axis in ['top','bottom','left','right']:
    ax1.spines[axis].set_linewidth(0.5)


# Extract x-tick positions
xtick_positions = np.mean(cmp_stk[:, list(range(0,cmp_stk.shape[1],3))], axis=0) #s2[:, [0, 9, 19, 29, 39, 49, 59]].mean().values
# Set x-tick marks and labels for the bottom axes
ax1.set_xticks(xtick_positions)
ax1.set_xticklabels(list(range(1,cmp_stk.shape[1],3)))
    

ax2 = ax1.twiny()

# Set x-tick marks and labels for the top axes
xtick_positions_top = np.mean(cmp_stk[:, list(range(0,cmp_stk.shape[1],3))], axis=0) #s2[:, [0, 9, 19, 29, 39, 49, 59]].mean().values
ax2.set_xticks(xtick_positions_top)
ax2.set_xticklabels(off.tolist()[0:cmp_stk.shape[1]:3])
    
ax2.set_xlim(ax1.get_xlim())
ax2.tick_params(axis='both', which='major', labelsize=2.5, width=0.2, length=1.4,pad=.6)

for axis in ['top','bottom','left','right']:
    ax2.spines[axis].set_linewidth(0.5)

ax1.set_xlabel('CMP stacked trace number', fontsize=3,labelpad=.9)
ax2.set_xlabel('CMP distance along survey line (m)', fontsize=3,labelpad=.9)

# Set label for y-axis
ax1.set_ylabel('Time (s)', fontsize=3,labelpad=.9)
plt.title('CMP Stack Traces', fontsize=4,pad=1)

plt.show()
No description has been provided for this image